Name : Jayesh Chaudhari¶

Roll No:22122017¶

MSCDS A¶

Import libraries and packages¶

In [2]:
import os
import pandas as pd
import numpy as np
import math
import datetime as dt

from sklearn.metrics import mean_squared_error, mean_absolute_error, explained_variance_score, r2_score 
from sklearn.metrics import mean_poisson_deviance, mean_gamma_deviance, accuracy_score
from sklearn.preprocessing import MinMaxScaler

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.layers import LSTM, GRU

from itertools import cycle
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots

Import Dataset¶

In [3]:
maindf = pd.read_csv('TSLA.csv')
maindf = maindf.rename(columns={'Date': 'date','Open':'open','High':'high','Low':'low','Close':'close',
                                'Adj Close':'adj_close','Volume':'volume'})
maindf.head()
Out[3]:
date open high low close adj_close volume
0 2018-03-01 23.000668 23.244667 22.004667 22.062000 22.062000 103284000
1 2018-03-02 21.798668 22.348000 21.531334 22.341333 22.341333 76392000
2 2018-03-05 22.159332 22.516666 21.952667 22.223333 22.223333 57357000
3 2018-03-06 22.250000 22.424667 21.802000 21.879999 21.879999 64285500
4 2018-03-07 21.695999 22.166668 21.449333 22.153334 22.153334 75109500
In [4]:
print("Total number of days: ",maindf.shape[0])
print("Total number of fields: ",maindf.shape[1])
Total number of days:  1516
Total number of fields:  7

Checking Null and NA value¶

In [5]:
print("Null values:", maindf.isnull().values.sum())
print("NA values:", maindf.isna().values.any())
Null values: 0
NA values: False

Convert Date field into datetime format¶

In [6]:
# convert date field from string to Date format 
maindf['date'] = pd.to_datetime(maindf.date)
maindf.head()
Out[6]:
date open high low close adj_close volume
0 2018-03-01 23.000668 23.244667 22.004667 22.062000 22.062000 103284000
1 2018-03-02 21.798668 22.348000 21.531334 22.341333 22.341333 76392000
2 2018-03-05 22.159332 22.516666 21.952667 22.223333 22.223333 57357000
3 2018-03-06 22.250000 22.424667 21.802000 21.879999 21.879999 64285500
4 2018-03-07 21.695999 22.166668 21.449333 22.153334 22.153334 75109500

EDA - Exploratory Data Analysis¶

Find the duration of dataset¶

In [7]:
print("Starting date: ",maindf.iloc[0][0])
print("Ending date: ", maindf.iloc[-1][0])
print("Duration: ", maindf.iloc[-1][0]-maindf.iloc[0][0])
Starting date:  2018-03-01 00:00:00
Ending date:  2024-03-08 00:00:00
Duration:  2199 days 00:00:00

Monthwise comparision between Stock open and close price¶

In [8]:
monthvise= maindf.groupby(maindf['date'].dt.strftime('%B'))[['open','close']].mean()
new_order = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 
             'September', 'October', 'November', 'December']
monthvise = monthvise.reindex(new_order, axis=0)
monthvise
Out[8]:
open close
date
January 167.399514 167.147961
February 170.523547 170.853234
March 137.781746 137.662244
April 136.521237 135.664754
May 119.523769 119.308845
June 132.286489 132.660194
July 143.081328 143.220201
August 155.879691 155.864782
September 164.233251 164.416334
October 151.656369 151.808359
November 167.124763 167.063790
December 170.556683 170.210517
In [9]:
fig = go.Figure()

fig.add_trace(go.Bar(
    x=monthvise.index,
    y=monthvise['open'],
    name='Stock Open Price',
    marker_color='crimson'
))
fig.add_trace(go.Bar(
    x=monthvise.index,
    y=monthvise['close'],
    name='Stock Close Price',
    marker_color='lightsalmon'
))

fig.update_layout(barmode='group', xaxis_tickangle=-45, 
                  title='Monthwise comparision between Stock open and close price')
fig.show()

Monthwise High and Low stock price¶

In [10]:
maindf.groupby(maindf['date'].dt.strftime('%B'))['low'].min()
monthvise_high = maindf.groupby(maindf['date'].dt.strftime('%B'))['high'].max()
monthvise_high = monthvise_high.reindex(new_order, axis=0)

monthvise_low = maindf.groupby(maindf['date'].dt.strftime('%B'))['low'].min()
monthvise_low = monthvise_low.reindex(new_order, axis=0)

fig = go.Figure()
fig.add_trace(go.Bar(
    x=monthvise_high.index,
    y=monthvise_high,
    name='Stock high Price',
    marker_color='rgb(0, 153, 204)'
))
fig.add_trace(go.Bar(
    x=monthvise_low.index,
    y=monthvise_low,
    name='Stock low Price',
    marker_color='rgb(255, 128, 0)'
))

fig.update_layout(barmode='group', 
                  title=' Monthwise High and Low stock price')
fig.show()

Trend comparision between stock open price, close price, high price, low price¶

In [11]:
names = cycle(['Stock Open Price','Stock Close Price','Stock High Price','Stock Low Price'])

fig = px.line(maindf, x=maindf.date, y=[maindf['open'], maindf['close'], 
                                          maindf['high'], maindf['low']],
             labels={'date': 'Date','value':'Stock value'})
fig.update_layout(title_text='Stock analysis chart', font_size=15, font_color='black',legend_title_text='Stock Parameters')
fig.for_each_trace(lambda t:  t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)

fig.show()

Here, We are going to predict close price for next 30 days¶

Prepare Stock Close price¶

Make separate dataframe of close price¶

In [12]:
closedf = maindf[['date','close']]
print("Shape of close dataframe:", closedf.shape)
Shape of close dataframe: (1516, 2)

Plotting Stock Close price chart¶

In [56]:
fig = px.line(closedf, x=closedf.date, y=closedf.close,labels={'date':'Date','close':'Close Stock'})
fig.update_traces(marker_line_width=2, opacity=0.8)
fig.update_layout(title_text='Stock close price chart', plot_bgcolor='white', font_size=15, font_color='black')
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()

Consider only last 1 year data for prediction¶

In [57]:
closedf = closedf[closedf['date'] > '2023-03-08']
close_stock = closedf.copy()
print("Total data for prediction: ",closedf.shape[0])
Total data for prediction:  252
In [58]:
fig = px.line(closedf, x=closedf.date, y=closedf.close,labels={'date':'Date','close':'Close Stock'})
fig.update_traces(marker_line_width=2, opacity=0.8, marker_line_color='orange')
fig.update_layout(title_text='Considered period to predict Stock close price', plot_bgcolor='white', font_size=15, font_color='black')
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()

Normalizing close price¶

In [59]:
del closedf['date']
scaler=MinMaxScaler(feature_range=(0,1))
closedf=scaler.fit_transform(np.array(closedf).reshape(-1,1))
print(closedf.shape)
(252, 1)

Prepare Data for train and test¶

In [60]:
training_size=int(len(closedf)*0.60)
test_size=len(closedf)-training_size
train_data,test_data=closedf[0:training_size,:],closedf[training_size:len(closedf),:1]
print("train_data: ", train_data.shape)
print("test_data: ", test_data.shape) 
train_data:  (151, 1)
test_data:  (101, 1)
In [61]:
# convert an array of values into a dataset matrix
def create_dataset(dataset, time_step=1):
    dataX, dataY = [], []
    for i in range(len(dataset)-time_step-1):
        a = dataset[i:(i+time_step), 0]   ###i=0, 0,1,2,3-----99   100 
        dataX.append(a)
        dataY.append(dataset[i + time_step, 0])
    return np.array(dataX), np.array(dataY)
In [62]:
time_step = 15
X_train, y_train = create_dataset(train_data, time_step)
X_test, y_test = create_dataset(test_data, time_step)

print("X_train: ", X_train.shape)
print("y_train: ", y_train.shape)
print("X_test: ", X_test.shape)
print("y_test", y_test.shape)
X_train:  (135, 15)
y_train:  (135,)
X_test:  (85, 15)
y_test (85,)
In [63]:
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)

print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (135, 15, 1)
X_test:  (85, 15, 1)

Model Building (GRU)¶

GRUs (Gated Recurrent Units) are very similar to Long Short Term Memory(LSTM). Just like LSTM, GRU uses gates to control the flow of information. They are relatively new as compared to LSTM. This is the reason they offer some improvement over LSTM and have simpler architecture.¶

In [64]:
tf.keras.backend.clear_session()
model=Sequential()
model.add(GRU(32,return_sequences=True,input_shape=(time_step,1)))
model.add(GRU(32,return_sequences=True))
model.add(GRU(32))
model.add(Dropout(0.20))
model.add(Dense(1))
model.compile(loss='mean_squared_error',optimizer='adam')
In [65]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 gru (GRU)                   (None, 15, 32)            3360      
                                                                 
 gru_1 (GRU)                 (None, 15, 32)            6336      
                                                                 
 gru_2 (GRU)                 (None, 32)                6336      
                                                                 
 dropout (Dropout)           (None, 32)                0         
                                                                 
 dense (Dense)               (None, 1)                 33        
                                                                 
=================================================================
Total params: 16,065
Trainable params: 16,065
Non-trainable params: 0
_________________________________________________________________
In [66]:
history = model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=200,batch_size=32,verbose=1)
Epoch 1/200
5/5 [==============================] - 13s 561ms/step - loss: 0.2641 - val_loss: 0.0401
Epoch 2/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0449 - val_loss: 0.0458
Epoch 3/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0614 - val_loss: 0.0412
Epoch 4/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0273 - val_loss: 0.0100
Epoch 5/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0247 - val_loss: 0.0120
Epoch 6/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0296 - val_loss: 0.0088
Epoch 7/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0211 - val_loss: 0.0149
Epoch 8/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0229 - val_loss: 0.0170
Epoch 9/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0172 - val_loss: 0.0086
Epoch 10/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0177 - val_loss: 0.0067
Epoch 11/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0148 - val_loss: 0.0063
Epoch 12/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0140 - val_loss: 0.0064
Epoch 13/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0152 - val_loss: 0.0076
Epoch 14/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0125 - val_loss: 0.0068
Epoch 15/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0105 - val_loss: 0.0051
Epoch 16/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0120 - val_loss: 0.0050
Epoch 17/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0122 - val_loss: 0.0071
Epoch 18/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0147 - val_loss: 0.0066
Epoch 19/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0142 - val_loss: 0.0049
Epoch 20/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0116 - val_loss: 0.0046
Epoch 21/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0116 - val_loss: 0.0050
Epoch 22/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0101 - val_loss: 0.0049
Epoch 23/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0102 - val_loss: 0.0046
Epoch 24/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0089 - val_loss: 0.0043
Epoch 25/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0080 - val_loss: 0.0050
Epoch 26/200
5/5 [==============================] - 0s 52ms/step - loss: 0.0100 - val_loss: 0.0050
Epoch 27/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0098 - val_loss: 0.0041
Epoch 28/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0089 - val_loss: 0.0042
Epoch 29/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0091 - val_loss: 0.0057
Epoch 30/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0108 - val_loss: 0.0039
Epoch 31/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0096 - val_loss: 0.0038
Epoch 32/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0089 - val_loss: 0.0053
Epoch 33/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0103 - val_loss: 0.0038
Epoch 34/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0106 - val_loss: 0.0037
Epoch 35/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0111 - val_loss: 0.0038
Epoch 36/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0082 - val_loss: 0.0042
Epoch 37/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0087 - val_loss: 0.0036
Epoch 38/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0093 - val_loss: 0.0035
Epoch 39/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0083 - val_loss: 0.0043
Epoch 40/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0099 - val_loss: 0.0034
Epoch 41/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0074 - val_loss: 0.0035
Epoch 42/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0093 - val_loss: 0.0049
Epoch 43/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0087 - val_loss: 0.0034
Epoch 44/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0091 - val_loss: 0.0035
Epoch 45/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0077 - val_loss: 0.0037
Epoch 46/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0086 - val_loss: 0.0032
Epoch 47/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0082 - val_loss: 0.0036
Epoch 48/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0054 - val_loss: 0.0035
Epoch 49/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0071 - val_loss: 0.0040
Epoch 50/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0071 - val_loss: 0.0031
Epoch 51/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0066 - val_loss: 0.0033
Epoch 52/200
5/5 [==============================] - 0s 53ms/step - loss: 0.0067 - val_loss: 0.0032
Epoch 53/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0078 - val_loss: 0.0030
Epoch 54/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0076 - val_loss: 0.0034
Epoch 55/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0072 - val_loss: 0.0031
Epoch 56/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0076 - val_loss: 0.0030
Epoch 57/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0067 - val_loss: 0.0041
Epoch 58/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0072 - val_loss: 0.0033
Epoch 59/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0089 - val_loss: 0.0030
Epoch 60/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0064 - val_loss: 0.0028
Epoch 61/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0064 - val_loss: 0.0036
Epoch 62/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0065 - val_loss: 0.0028
Epoch 63/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0067 - val_loss: 0.0028
Epoch 64/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0069 - val_loss: 0.0035
Epoch 65/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0060 - val_loss: 0.0029
Epoch 66/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0076 - val_loss: 0.0032
Epoch 67/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0086 - val_loss: 0.0030
Epoch 68/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0062 - val_loss: 0.0042
Epoch 69/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0080 - val_loss: 0.0028
Epoch 70/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0065 - val_loss: 0.0027
Epoch 71/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0057 - val_loss: 0.0031
Epoch 72/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0057 - val_loss: 0.0036
Epoch 73/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0060 - val_loss: 0.0027
Epoch 74/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0078 - val_loss: 0.0037
Epoch 75/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0082 - val_loss: 0.0030
Epoch 76/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0062 - val_loss: 0.0034
Epoch 77/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0086 - val_loss: 0.0036
Epoch 78/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0078 - val_loss: 0.0041
Epoch 79/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0069 - val_loss: 0.0033
Epoch 80/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0092 - val_loss: 0.0036
Epoch 81/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0076 - val_loss: 0.0042
Epoch 82/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0067 - val_loss: 0.0026
Epoch 83/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0085 - val_loss: 0.0041
Epoch 84/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0067 - val_loss: 0.0031
Epoch 85/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0064 - val_loss: 0.0025
Epoch 86/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0066 - val_loss: 0.0038
Epoch 87/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0080 - val_loss: 0.0035
Epoch 88/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0059 - val_loss: 0.0029
Epoch 89/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0070 - val_loss: 0.0031
Epoch 90/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0066 - val_loss: 0.0032
Epoch 91/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0071 - val_loss: 0.0026
Epoch 92/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0078 - val_loss: 0.0043
Epoch 93/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0074 - val_loss: 0.0026
Epoch 94/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0053 - val_loss: 0.0025
Epoch 95/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0064 - val_loss: 0.0025
Epoch 96/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0056 - val_loss: 0.0026
Epoch 97/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0053 - val_loss: 0.0031
Epoch 98/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0054 - val_loss: 0.0024
Epoch 99/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0063 - val_loss: 0.0027
Epoch 100/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0066 - val_loss: 0.0025
Epoch 101/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0053 - val_loss: 0.0028
Epoch 102/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0062 - val_loss: 0.0024
Epoch 103/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0052 - val_loss: 0.0024
Epoch 104/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0056 - val_loss: 0.0042
Epoch 105/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0068 - val_loss: 0.0024
Epoch 106/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0068 - val_loss: 0.0030
Epoch 107/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0048 - val_loss: 0.0023
Epoch 108/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0054 - val_loss: 0.0023
Epoch 109/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0057 - val_loss: 0.0025
Epoch 110/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0055 - val_loss: 0.0030
Epoch 111/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0047 - val_loss: 0.0024
Epoch 112/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0050 - val_loss: 0.0039
Epoch 113/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0056 - val_loss: 0.0026
Epoch 114/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0058 - val_loss: 0.0026
Epoch 115/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0054 - val_loss: 0.0025
Epoch 116/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0058 - val_loss: 0.0023
Epoch 117/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0051 - val_loss: 0.0025
Epoch 118/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0065 - val_loss: 0.0029
Epoch 119/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0063 - val_loss: 0.0023
Epoch 120/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0049 - val_loss: 0.0034
Epoch 121/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0059 - val_loss: 0.0024
Epoch 122/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0061 - val_loss: 0.0023
Epoch 123/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0060 - val_loss: 0.0025
Epoch 124/200
5/5 [==============================] - 0s 36ms/step - loss: 0.0061 - val_loss: 0.0023
Epoch 125/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0075 - val_loss: 0.0027
Epoch 126/200
5/5 [==============================] - 0s 36ms/step - loss: 0.0060 - val_loss: 0.0034
Epoch 127/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0060 - val_loss: 0.0023
Epoch 128/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0059 - val_loss: 0.0023
Epoch 129/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0058 - val_loss: 0.0024
Epoch 130/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0069 - val_loss: 0.0033
Epoch 131/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0048 - val_loss: 0.0023
Epoch 132/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0041 - val_loss: 0.0036
Epoch 133/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0057 - val_loss: 0.0024
Epoch 134/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0071 - val_loss: 0.0023
Epoch 135/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0059 - val_loss: 0.0036
Epoch 136/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0059 - val_loss: 0.0023
Epoch 137/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0067 - val_loss: 0.0035
Epoch 138/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0049 - val_loss: 0.0023
Epoch 139/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0052 - val_loss: 0.0025
Epoch 140/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0072 - val_loss: 0.0031
Epoch 141/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0042 - val_loss: 0.0023
Epoch 142/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0063 - val_loss: 0.0025
Epoch 143/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0059 - val_loss: 0.0032
Epoch 144/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0073 - val_loss: 0.0028
Epoch 145/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0074 - val_loss: 0.0032
Epoch 146/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0053 - val_loss: 0.0023
Epoch 147/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0059 - val_loss: 0.0025
Epoch 148/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0064 - val_loss: 0.0028
Epoch 149/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0042 - val_loss: 0.0024
Epoch 150/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0067 - val_loss: 0.0031
Epoch 151/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0052 - val_loss: 0.0023
Epoch 152/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0065 - val_loss: 0.0022
Epoch 153/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0038 - val_loss: 0.0027
Epoch 154/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0058 - val_loss: 0.0023
Epoch 155/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0047 - val_loss: 0.0028
Epoch 156/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0057 - val_loss: 0.0022
Epoch 157/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0048 - val_loss: 0.0023
Epoch 158/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0058 - val_loss: 0.0034
Epoch 159/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0055 - val_loss: 0.0022
Epoch 160/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0061 - val_loss: 0.0022
Epoch 161/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0058 - val_loss: 0.0032
Epoch 162/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0061 - val_loss: 0.0022
Epoch 163/200
5/5 [==============================] - 0s 47ms/step - loss: 0.0050 - val_loss: 0.0030
Epoch 164/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0056 - val_loss: 0.0022
Epoch 165/200
5/5 [==============================] - 0s 46ms/step - loss: 0.0050 - val_loss: 0.0024
Epoch 166/200
5/5 [==============================] - 0s 44ms/step - loss: 0.0058 - val_loss: 0.0023
Epoch 167/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0049 - val_loss: 0.0021
Epoch 168/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0055 - val_loss: 0.0026
Epoch 169/200
5/5 [==============================] - 0s 45ms/step - loss: 0.0057 - val_loss: 0.0024
Epoch 170/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0057 - val_loss: 0.0022
Epoch 171/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0058 - val_loss: 0.0029
Epoch 172/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0054 - val_loss: 0.0022
Epoch 173/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0049 - val_loss: 0.0025
Epoch 174/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0050 - val_loss: 0.0022
Epoch 175/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0052 - val_loss: 0.0022
Epoch 176/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0055 - val_loss: 0.0026
Epoch 177/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0050 - val_loss: 0.0023
Epoch 178/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0056 - val_loss: 0.0023
Epoch 179/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0056 - val_loss: 0.0024
Epoch 180/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0062 - val_loss: 0.0029
Epoch 181/200
5/5 [==============================] - 0s 42ms/step - loss: 0.0053 - val_loss: 0.0028
Epoch 182/200
5/5 [==============================] - 0s 43ms/step - loss: 0.0055 - val_loss: 0.0022
Epoch 183/200
5/5 [==============================] - 0s 39ms/step - loss: 0.0055 - val_loss: 0.0021
Epoch 184/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0062 - val_loss: 0.0024
Epoch 185/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0053 - val_loss: 0.0022
Epoch 186/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0048 - val_loss: 0.0027
Epoch 187/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0046 - val_loss: 0.0023
Epoch 188/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0049 - val_loss: 0.0021
Epoch 189/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0053 - val_loss: 0.0027
Epoch 190/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0050 - val_loss: 0.0023
Epoch 191/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0044 - val_loss: 0.0022
Epoch 192/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0043 - val_loss: 0.0023
Epoch 193/200
5/5 [==============================] - 0s 41ms/step - loss: 0.0053 - val_loss: 0.0022
Epoch 194/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0069 - val_loss: 0.0021
Epoch 195/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0055 - val_loss: 0.0034
Epoch 196/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0060 - val_loss: 0.0022
Epoch 197/200
5/5 [==============================] - 0s 38ms/step - loss: 0.0057 - val_loss: 0.0041
Epoch 198/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0057 - val_loss: 0.0023
Epoch 199/200
5/5 [==============================] - 0s 40ms/step - loss: 0.0047 - val_loss: 0.0022
Epoch 200/200
5/5 [==============================] - 0s 37ms/step - loss: 0.0054 - val_loss: 0.0030

Prepare Data for train and test¶

In [67]:
import matplotlib.pyplot as plt

loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(loss))

plt.plot(epochs, loss, 'r', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend(loc=0)
plt.figure()


plt.show()
<Figure size 432x288 with 0 Axes>
In [68]:
### Lets Do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
train_predict.shape, test_predict.shape
5/5 [==============================] - 2s 6ms/step
3/3 [==============================] - 0s 7ms/step
Out[68]:
((135, 1), (85, 1))
In [ ]:
 

Model Evaluation¶

In [69]:
# Transform back to original form

train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
original_ytrain = scaler.inverse_transform(y_train.reshape(-1,1)) 
original_ytest = scaler.inverse_transform(y_test.reshape(-1,1)) 

Evaluation metrices RMSE, MSE and MAE¶

Root Mean Square Error (RMSE), Mean Square Error (MSE) and Mean absolute Error (MAE) are a standard way to measure the error of a model in predicting quantitative data.¶

In [70]:
# Evaluation metrices RMSE and MAE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("Train data MSE: ", mean_squared_error(original_ytrain,train_predict))
print("Train data MAE: ", mean_absolute_error(original_ytrain,train_predict))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
print("Test data MSE: ", mean_squared_error(original_ytest,test_predict))
print("Test data MAE: ", mean_absolute_error(original_ytest,test_predict))
Train data RMSE:  7.507432723231685
Train data MSE:  56.36154609384992
Train data MAE:  5.719833239901621
-------------------------------------------------------------------------------------
Test data RMSE:  7.635885836609775
Test data MSE:  58.30675250973776
Test data MAE:  5.8154804967026665

Explained variance regression score¶

The explained variance score explains the dispersion of errors of a given dataset, and the formula is written as follows: Here, and Var(y) is the variance of prediction errors and actual values respectively. Scores close to 1.0 are highly desired, indicating better squares of standard deviations of errors.¶

In [71]:
print("Train data explained variance regression score:", explained_variance_score(original_ytrain, train_predict))
print("Test data explained variance regression score:", explained_variance_score(original_ytest, test_predict))
Train data explained variance regression score: 0.966472188786208
Test data explained variance regression score: 0.9335442948913505

R2 score for regression¶

R-squared (R2) is a statistical measure that represents the proportion of the variance for a dependent variable that's explained by an independent variable or variables in a regression model.¶

1 = Best¶

0 or < 0 = worse¶

In [72]:
print("Train data R2 score:", r2_score(original_ytrain, train_predict))
print("Test data R2 score:", r2_score(original_ytest, test_predict))
Train data R2 score: 0.9637194224000021
Test data R2 score: 0.9029513588191602

Regression Loss Mean Gamma deviance regression loss (MGD) and Mean Poisson deviance regression loss (MPD¶

In [73]:
print("Train data MGD: ", mean_gamma_deviance(original_ytrain, train_predict))
print("Test data MGD: ", mean_gamma_deviance(original_ytest, test_predict))
print("----------------------------------------------------------------------")
print("Train data MPD: ", mean_poisson_deviance(original_ytrain, train_predict))
print("Test data MPD: ", mean_poisson_deviance(original_ytest, test_predict))
Train data MGD:  0.0010756578441746185
Test data MGD:  0.0012704307967323495
----------------------------------------------------------------------
Train data MPD:  0.24238117906183637
Test data MPD:  0.27069876636371715

Comparision of original stock close price and predicted close price¶

In [74]:
# shift train predictions for plotting

look_back=time_step
trainPredictPlot = np.empty_like(closedf)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
print("Train predicted data: ", trainPredictPlot.shape)

# shift test predictions for plotting
testPredictPlot = np.empty_like(closedf)
testPredictPlot[:, :] = np.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(closedf)-1, :] = test_predict
print("Test predicted data: ", testPredictPlot.shape)

names = cycle(['Original close price','Train predicted close price','Test predicted close price'])


plotdf = pd.DataFrame({'date': close_stock['date'],
                       'original_close': close_stock['close'],
                      'train_predicted_close': trainPredictPlot.reshape(1,-1)[0].tolist(),
                      'test_predicted_close': testPredictPlot.reshape(1,-1)[0].tolist()})

fig = px.line(plotdf,x=plotdf['date'], y=[plotdf['original_close'],plotdf['train_predicted_close'],
                                          plotdf['test_predicted_close']],
              labels={'value':'Stock price','date': 'Date'})
fig.update_layout(title_text='Comparision between original close price vs predicted close price',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Close Price')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
Train predicted data:  (252, 1)
Test predicted data:  (252, 1)

Predicting next 30 days¶

In [75]:
x_input=test_data[len(test_data)-time_step:].reshape(1,-1)
temp_input=list(x_input)
temp_input=temp_input[0].tolist()

from numpy import array

lst_output=[]
n_steps=time_step
i=0
pred_days = 30
while(i<pred_days):
    
    if(len(temp_input)>time_step):
        
        x_input=np.array(temp_input[1:])
        #print("{} day input {}".format(i,x_input))
        x_input = x_input.reshape(1,-1)
        x_input = x_input.reshape((1, n_steps, 1))
        
        yhat = model.predict(x_input, verbose=0)
        #print("{} day output {}".format(i,yhat))
        temp_input.extend(yhat[0].tolist())
        temp_input=temp_input[1:]
        #print(temp_input)
       
        lst_output.extend(yhat.tolist())
        i=i+1
        
    else:
        
        x_input = x_input.reshape((1, n_steps,1))
        yhat = model.predict(x_input, verbose=0)
        temp_input.extend(yhat[0].tolist())
        
        lst_output.extend(yhat.tolist())
        i=i+1
               
print("Output of predicted next days: ", len(lst_output))
Output of predicted next days:  30

Plotting last 15 days of dataset and next predicted 30 days¶

In [76]:
last_days=np.arange(1,time_step+1)
day_pred=np.arange(time_step+1,time_step+pred_days+1)
print(last_days)
print(day_pred)
[ 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15]
[16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39
 40 41 42 43 44 45]
In [77]:
temp_mat = np.empty((len(last_days)+pred_days+1,1))
temp_mat[:] = np.nan
temp_mat = temp_mat.reshape(1,-1).tolist()[0]

last_original_days_value = temp_mat
next_predicted_days_value = temp_mat

last_original_days_value[0:time_step+1] = scaler.inverse_transform(closedf[len(closedf)-time_step:]).reshape(1,-1).tolist()[0]
next_predicted_days_value[time_step+1:] = scaler.inverse_transform(np.array(lst_output).reshape(-1,1)).reshape(1,-1).tolist()[0]

new_pred_plot = pd.DataFrame({
    'last_original_days_value':last_original_days_value,
    'next_predicted_days_value':next_predicted_days_value
})

names = cycle(['Last 15 days close price','Predicted next 30 days close price'])

fig = px.line(new_pred_plot,x=new_pred_plot.index, y=[new_pred_plot['last_original_days_value'],
                                                      new_pred_plot['next_predicted_days_value']],
              labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Compare last 15 days vs next 30 days',
                  plot_bgcolor='white', font_size=15, font_color='black',legend_title_text='Close Price')

fig.for_each_trace(lambda t:  t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()

Plotting entire Closing Stock Price with next 30 days period of prediction¶

In [78]:
lstmdf=closedf.tolist()
lstmdf.extend((np.array(lst_output).reshape(-1,1)).tolist())
lstmdf=scaler.inverse_transform(lstmdf).reshape(1,-1).tolist()[0]

names = cycle(['Close price'])

fig = px.line(lstmdf,labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Plotting whole closing stock price with prediction',
                  plot_bgcolor='white', font_size=15, font_color='black',legend_title_text='Stock')

fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
In [ ]:
 

LSTM¶

In [79]:
import pandas as pd
import numpy as np
import math
import datetime as dt
from sklearn.metrics import mean_squared_error, mean_absolute_error, explained_variance_score, r2_score 
from sklearn.metrics import mean_poisson_deviance, mean_gamma_deviance, accuracy_score
from sklearn.preprocessing import MinMaxScaler

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM, GRU

from itertools import cycle

# ! pip install plotly
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots
In [80]:
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)

print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (135, 15, 1)
X_test:  (85, 15, 1)

LSTM model structure¶

In [81]:
tf.keras.backend.clear_session()
model=Sequential()
model.add(LSTM(32,return_sequences=True,input_shape=(time_step,1)))
model.add(LSTM(32,return_sequences=True))
model.add(LSTM(32))
model.add(Dense(1))
model.compile(loss='mean_squared_error',optimizer='adam')
In [82]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm (LSTM)                 (None, 15, 32)            4352      
                                                                 
 lstm_1 (LSTM)               (None, 15, 32)            8320      
                                                                 
 lstm_2 (LSTM)               (None, 32)                8320      
                                                                 
 dense (Dense)               (None, 1)                 33        
                                                                 
=================================================================
Total params: 21,025
Trainable params: 21,025
Non-trainable params: 0
_________________________________________________________________
In [83]:
model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=200,batch_size=5,verbose=1)
Epoch 1/200
27/27 [==============================] - 10s 92ms/step - loss: 0.0784 - val_loss: 0.0145
Epoch 2/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0211 - val_loss: 0.0202
Epoch 3/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0194 - val_loss: 0.0255
Epoch 4/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0200 - val_loss: 0.0170
Epoch 5/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0188 - val_loss: 0.0128
Epoch 6/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0170 - val_loss: 0.0180
Epoch 7/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0172 - val_loss: 0.0136
Epoch 8/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0164 - val_loss: 0.0139
Epoch 9/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0167 - val_loss: 0.0116
Epoch 10/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0143 - val_loss: 0.0175
Epoch 11/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0141 - val_loss: 0.0096
Epoch 12/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0116 - val_loss: 0.0123
Epoch 13/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0111 - val_loss: 0.0112
Epoch 14/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0105 - val_loss: 0.0086
Epoch 15/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0099 - val_loss: 0.0053
Epoch 16/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0122 - val_loss: 0.0065
Epoch 17/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0094 - val_loss: 0.0078
Epoch 18/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0092 - val_loss: 0.0049
Epoch 19/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0091 - val_loss: 0.0066
Epoch 20/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0089 - val_loss: 0.0046
Epoch 21/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0081 - val_loss: 0.0052
Epoch 22/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0093 - val_loss: 0.0186
Epoch 23/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0082 - val_loss: 0.0094
Epoch 24/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0074 - val_loss: 0.0083
Epoch 25/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0074 - val_loss: 0.0044
Epoch 26/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0072 - val_loss: 0.0078
Epoch 27/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0065 - val_loss: 0.0040
Epoch 28/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0066 - val_loss: 0.0066
Epoch 29/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0070 - val_loss: 0.0067
Epoch 30/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0067 - val_loss: 0.0038
Epoch 31/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0065 - val_loss: 0.0076
Epoch 32/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0073 - val_loss: 0.0083
Epoch 33/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0076 - val_loss: 0.0038
Epoch 34/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0061 - val_loss: 0.0058
Epoch 35/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0055 - val_loss: 0.0042
Epoch 36/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0052 - val_loss: 0.0039
Epoch 37/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0052 - val_loss: 0.0036
Epoch 38/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0049 - val_loss: 0.0035
Epoch 39/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0048 - val_loss: 0.0035
Epoch 40/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 41/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0045 - val_loss: 0.0035
Epoch 42/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0045 - val_loss: 0.0034
Epoch 43/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0046 - val_loss: 0.0052
Epoch 44/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0054 - val_loss: 0.0037
Epoch 45/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0054 - val_loss: 0.0050
Epoch 46/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0047 - val_loss: 0.0035
Epoch 47/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0045 - val_loss: 0.0046
Epoch 48/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0040 - val_loss: 0.0033
Epoch 49/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0041 - val_loss: 0.0068
Epoch 50/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0040 - val_loss: 0.0036
Epoch 51/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0038 - val_loss: 0.0031
Epoch 52/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0038 - val_loss: 0.0039
Epoch 53/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0037 - val_loss: 0.0030
Epoch 54/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0040 - val_loss: 0.0042
Epoch 55/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0040 - val_loss: 0.0058
Epoch 56/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0043 - val_loss: 0.0030
Epoch 57/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0035 - val_loss: 0.0029
Epoch 58/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0035 - val_loss: 0.0031
Epoch 59/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0034 - val_loss: 0.0043
Epoch 60/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0035 - val_loss: 0.0039
Epoch 61/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0041 - val_loss: 0.0027
Epoch 62/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0033 - val_loss: 0.0045
Epoch 63/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0033 - val_loss: 0.0034
Epoch 64/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0031 - val_loss: 0.0039
Epoch 65/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0028
Epoch 66/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0032 - val_loss: 0.0028
Epoch 67/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0035 - val_loss: 0.0027
Epoch 68/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0034 - val_loss: 0.0029
Epoch 69/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0040 - val_loss: 0.0027
Epoch 70/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0034 - val_loss: 0.0047
Epoch 71/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0031 - val_loss: 0.0026
Epoch 72/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0030 - val_loss: 0.0026
Epoch 73/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0032 - val_loss: 0.0030
Epoch 74/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0039 - val_loss: 0.0054
Epoch 75/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0035 - val_loss: 0.0025
Epoch 76/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0053
Epoch 77/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0032 - val_loss: 0.0025
Epoch 78/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0036 - val_loss: 0.0043
Epoch 79/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0035 - val_loss: 0.0024
Epoch 80/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0030 - val_loss: 0.0026
Epoch 81/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0030 - val_loss: 0.0043
Epoch 82/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0033 - val_loss: 0.0023
Epoch 83/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0024
Epoch 84/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0032 - val_loss: 0.0026
Epoch 85/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0029 - val_loss: 0.0027
Epoch 86/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0034 - val_loss: 0.0023
Epoch 87/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0028
Epoch 88/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0023
Epoch 89/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0030 - val_loss: 0.0024
Epoch 90/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0030 - val_loss: 0.0024
Epoch 91/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0024
Epoch 92/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0024
Epoch 93/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 94/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0025
Epoch 95/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0023
Epoch 96/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 97/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0031 - val_loss: 0.0025
Epoch 98/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0042 - val_loss: 0.0023
Epoch 99/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0032 - val_loss: 0.0029
Epoch 100/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0023
Epoch 101/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0028 - val_loss: 0.0023
Epoch 102/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0031 - val_loss: 0.0029
Epoch 103/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0029 - val_loss: 0.0026
Epoch 104/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0032 - val_loss: 0.0025
Epoch 105/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0023
Epoch 106/200
27/27 [==============================] - 1s 33ms/step - loss: 0.0031 - val_loss: 0.0023
Epoch 107/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0029
Epoch 108/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0043
Epoch 109/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0034 - val_loss: 0.0023
Epoch 110/200
27/27 [==============================] - 1s 39ms/step - loss: 0.0026 - val_loss: 0.0035
Epoch 111/200
27/27 [==============================] - 1s 31ms/step - loss: 0.0033 - val_loss: 0.0023
Epoch 112/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0029 - val_loss: 0.0028
Epoch 113/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0028 - val_loss: 0.0031
Epoch 114/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0029 - val_loss: 0.0026
Epoch 115/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0028 - val_loss: 0.0028
Epoch 116/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0031 - val_loss: 0.0029
Epoch 117/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0035
Epoch 118/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0031 - val_loss: 0.0029
Epoch 119/200
27/27 [==============================] - 1s 30ms/step - loss: 0.0031 - val_loss: 0.0027
Epoch 120/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0031 - val_loss: 0.0024
Epoch 121/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0024
Epoch 122/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0025
Epoch 123/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 124/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0027 - val_loss: 0.0025
Epoch 125/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0025
Epoch 126/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0033 - val_loss: 0.0026
Epoch 127/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0029 - val_loss: 0.0027
Epoch 128/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0027
Epoch 129/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0025
Epoch 130/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0031 - val_loss: 0.0026
Epoch 131/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0029
Epoch 132/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0029 - val_loss: 0.0041
Epoch 133/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0024
Epoch 134/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0024
Epoch 135/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0028
Epoch 136/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0024
Epoch 137/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0030 - val_loss: 0.0028
Epoch 138/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0034 - val_loss: 0.0024
Epoch 139/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0023
Epoch 140/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0028
Epoch 141/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0029
Epoch 142/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 143/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0028
Epoch 144/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0027
Epoch 145/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0031 - val_loss: 0.0028
Epoch 146/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0027 - val_loss: 0.0023
Epoch 147/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0032 - val_loss: 0.0024
Epoch 148/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0024
Epoch 149/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0026 - val_loss: 0.0024
Epoch 150/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0027 - val_loss: 0.0025
Epoch 151/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0029 - val_loss: 0.0024
Epoch 152/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0026 - val_loss: 0.0024
Epoch 153/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0027 - val_loss: 0.0024
Epoch 154/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0025 - val_loss: 0.0028
Epoch 155/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0029
Epoch 156/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0027 - val_loss: 0.0022
Epoch 157/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0023
Epoch 158/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0031 - val_loss: 0.0032
Epoch 159/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0028 - val_loss: 0.0046
Epoch 160/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0028 - val_loss: 0.0023
Epoch 161/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0029
Epoch 162/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0025 - val_loss: 0.0027
Epoch 163/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 164/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0025 - val_loss: 0.0023
Epoch 165/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0025
Epoch 166/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0027
Epoch 167/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 168/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 169/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0027 - val_loss: 0.0023
Epoch 170/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0031 - val_loss: 0.0043
Epoch 171/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0038 - val_loss: 0.0022
Epoch 172/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0030
Epoch 173/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0033 - val_loss: 0.0028
Epoch 174/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0027 - val_loss: 0.0026
Epoch 175/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0026 - val_loss: 0.0028
Epoch 176/200
27/27 [==============================] - 1s 29ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 177/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 178/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0026 - val_loss: 0.0043
Epoch 179/200
27/27 [==============================] - 1s 23ms/step - loss: 0.0027 - val_loss: 0.0030
Epoch 180/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0026 - val_loss: 0.0024
Epoch 181/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 182/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0025 - val_loss: 0.0036
Epoch 183/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0027 - val_loss: 0.0033
Epoch 184/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0026 - val_loss: 0.0028
Epoch 185/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0026 - val_loss: 0.0047
Epoch 186/200
27/27 [==============================] - 1s 19ms/step - loss: 0.0028 - val_loss: 0.0027
Epoch 187/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0026 - val_loss: 0.0027
Epoch 188/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 189/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0029 - val_loss: 0.0024
Epoch 190/200
27/27 [==============================] - 1s 20ms/step - loss: 0.0026 - val_loss: 0.0028
Epoch 191/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0029 - val_loss: 0.0024
Epoch 192/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0026
Epoch 193/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0030 - val_loss: 0.0027
Epoch 194/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0028 - val_loss: 0.0024
Epoch 195/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0024 - val_loss: 0.0025
Epoch 196/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0026 - val_loss: 0.0023
Epoch 197/200
27/27 [==============================] - 1s 21ms/step - loss: 0.0025 - val_loss: 0.0030
Epoch 198/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0025 - val_loss: 0.0038
Epoch 199/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0032 - val_loss: 0.0027
Epoch 200/200
27/27 [==============================] - 1s 22ms/step - loss: 0.0028 - val_loss: 0.0034
Out[83]:
<keras.callbacks.History at 0x20d6c6b3fa0>
In [84]:
### Lets Do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
train_predict.shape, test_predict.shape
5/5 [==============================] - 3s 17ms/step
3/3 [==============================] - 0s 9ms/step
Out[84]:
((135, 1), (85, 1))
In [85]:
# Transform back to original form

train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
original_ytrain = scaler.inverse_transform(y_train.reshape(-1,1)) 
original_ytest = scaler.inverse_transform(y_test.reshape(-1,1)) 

Evaluation metrices RMSE, MSE and MAE¶

Root Mean Square Error (RMSE), Mean Square Error (MSE) and Mean absolute Error (MAE) are a standard way to measure the error of a model in predicting quantitative data.

In [86]:
# Evaluation metrices RMSE and MAE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("Train data MSE: ", mean_squared_error(original_ytrain,train_predict))
print("Test data MAE: ", mean_absolute_error(original_ytrain,train_predict))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
print("Test data MSE: ", mean_squared_error(original_ytest,test_predict))
print("Test data MAE: ", mean_absolute_error(original_ytest,test_predict))
Train data RMSE:  7.1337208302685156
Train data MSE:  50.88997288420692
Test data MAE:  5.501584416927084
-------------------------------------------------------------------------------------
Test data RMSE:  8.171949850639987
Test data MSE:  66.78076436137489
Test data MAE:  6.496737904526653

Explained variance regression score¶

The explained variance score explains the dispersion of errors of a given dataset, and the formula is written as follows: Here, and Var(y) is the variance of prediction errors and actual values respectively. Scores close to 1.0 are highly desired, indicating better squares of standard deviations of errors.

In [87]:
print("Train data explained variance regression score:", explained_variance_score(original_ytrain, train_predict))
print("Test data explained variance regression score:", explained_variance_score(original_ytest, test_predict))
Train data explained variance regression score: 0.9705385657458542
Test data explained variance regression score: 0.9138791381981263

R2 score for regression¶

R-squared (R2) is a statistical measure that represents the proportion of the variance for a dependent variable that's explained by an independent variable or variables in a regression model.

1 = Best 0 or < 0 = worse

In [88]:
print("Train data R2 score:", r2_score(original_ytrain, train_predict))
print("Test data R2 score:", r2_score(original_ytest, test_predict))
Train data R2 score: 0.9672415372138146
Test data R2 score: 0.8888467945936981

Regression Loss Mean Gamma deviance regression loss (MGD) and Mean Poisson deviance regression loss (MPD)¶¶

In [89]:
print("Train data MGD: ", mean_gamma_deviance(original_ytrain, train_predict))
print("Test data MGD: ", mean_gamma_deviance(original_ytest, test_predict))
print("----------------------------------------------------------------------")
print("Train data MPD: ", mean_poisson_deviance(original_ytrain, train_predict))
print("Test data MPD: ", mean_poisson_deviance(original_ytest, test_predict))
Train data MGD:  0.0009165486742124975
Test data MGD:  0.0013988171265366667
----------------------------------------------------------------------
Train data MPD:  0.21320567599881526
Test data MPD:  0.3037113436924729
In [ ]:
 

Comparision between original stock close price vs predicted close price¶

In [90]:
# shift train predictions for plotting

look_back=time_step
trainPredictPlot = np.empty_like(closedf)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
print("Train predicted data: ", trainPredictPlot.shape)

# shift test predictions for plotting
testPredictPlot = np.empty_like(closedf)
testPredictPlot[:, :] = np.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(closedf)-1, :] = test_predict
print("Test predicted data: ", testPredictPlot.shape)

names = cycle(['Original close price','Train predicted close price','Test predicted close price'])


plotdf = pd.DataFrame({'date': close_stock['date'],
                       'original_close': close_stock['close'],
                      'train_predicted_close': trainPredictPlot.reshape(1,-1)[0].tolist(),
                      'test_predicted_close': testPredictPlot.reshape(1,-1)[0].tolist()})

fig = px.line(plotdf,x=plotdf['date'], y=[plotdf['original_close'],plotdf['train_predicted_close'],
                                          plotdf['test_predicted_close']],
              labels={'value':'Stock price','date': 'Date'})
fig.update_layout(title_text='Comparision between original close price vs predicted close price',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Close Price')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
Train predicted data:  (252, 1)
Test predicted data:  (252, 1)

Plotting last 15 days of dataset and next predicted 30 days¶

In [91]:
last_days=np.arange(1,time_step+1)
day_pred=np.arange(time_step+1,time_step+pred_days+1)
print(last_days)
print(day_pred)
[ 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15]
[16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39
 40 41 42 43 44 45]
In [92]:
temp_mat = np.empty((len(last_days)+pred_days+1,1))
temp_mat[:] = np.nan
temp_mat = temp_mat.reshape(1,-1).tolist()[0]

last_original_days_value = temp_mat
next_predicted_days_value = temp_mat

last_original_days_value[0:time_step+1] = scaler.inverse_transform(closedf[len(closedf)-time_step:]).reshape(1,-1).tolist()[0]
next_predicted_days_value[time_step+1:] = scaler.inverse_transform(np.array(lst_output).reshape(-1,1)).reshape(1,-1).tolist()[0]

new_pred_plot = pd.DataFrame({
    'last_original_days_value':last_original_days_value,
    'next_predicted_days_value':next_predicted_days_value
})

names = cycle(['Last 15 days close price','Predicted next 30 days close price'])

fig = px.line(new_pred_plot,x=new_pred_plot.index, y=[new_pred_plot['last_original_days_value'],
                                                      new_pred_plot['next_predicted_days_value']],
              labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Compare last 15 days vs next 30 days',
                  plot_bgcolor='white', font_size=15, font_color='black',legend_title_text='Close Price')

fig.for_each_trace(lambda t:  t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
In [ ]:
 

LSTM + GRU¶

In [93]:
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)

print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (135, 15, 1)
X_test:  (85, 15, 1)

Model structure¶

In [94]:
tf.keras.backend.clear_session()
model=Sequential()
model.add(LSTM(32,return_sequences=True,input_shape=(time_step,1)))
model.add(LSTM(32,return_sequences=True))
model.add(GRU(32,return_sequences=True))
model.add(GRU(32))
model.add(Dense(1))
model.compile(loss='mean_squared_error',optimizer='adam')
In [95]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm (LSTM)                 (None, 15, 32)            4352      
                                                                 
 lstm_1 (LSTM)               (None, 15, 32)            8320      
                                                                 
 gru (GRU)                   (None, 15, 32)            6336      
                                                                 
 gru_1 (GRU)                 (None, 32)                6336      
                                                                 
 dense (Dense)               (None, 1)                 33        
                                                                 
=================================================================
Total params: 25,377
Trainable params: 25,377
Non-trainable params: 0
_________________________________________________________________
In [96]:
model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=200,batch_size=5,verbose=1)
Epoch 1/200
27/27 [==============================] - 12s 127ms/step - loss: 0.0901 - val_loss: 0.0158
Epoch 2/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0256 - val_loss: 0.0142
Epoch 3/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0214 - val_loss: 0.0149
Epoch 4/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0185 - val_loss: 0.0128
Epoch 5/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0185 - val_loss: 0.0249
Epoch 6/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0230 - val_loss: 0.0125
Epoch 7/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0186 - val_loss: 0.0106
Epoch 8/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0176 - val_loss: 0.0125
Epoch 9/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0129 - val_loss: 0.0098
Epoch 10/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0110 - val_loss: 0.0166
Epoch 11/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0104 - val_loss: 0.0120
Epoch 12/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0094 - val_loss: 0.0075
Epoch 13/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0101 - val_loss: 0.0048
Epoch 14/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0086 - val_loss: 0.0073
Epoch 15/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0091 - val_loss: 0.0064
Epoch 16/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0084 - val_loss: 0.0132
Epoch 17/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0076 - val_loss: 0.0074
Epoch 18/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0067 - val_loss: 0.0041
Epoch 19/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0068 - val_loss: 0.0061
Epoch 20/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0077 - val_loss: 0.0038
Epoch 21/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0065 - val_loss: 0.0043
Epoch 22/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0055 - val_loss: 0.0045
Epoch 23/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0055 - val_loss: 0.0075
Epoch 24/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0051 - val_loss: 0.0034
Epoch 25/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0062 - val_loss: 0.0040
Epoch 26/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0057 - val_loss: 0.0050
Epoch 27/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0056 - val_loss: 0.0055
Epoch 28/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0046 - val_loss: 0.0090
Epoch 29/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0049 - val_loss: 0.0081
Epoch 30/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0042 - val_loss: 0.0045
Epoch 31/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0037 - val_loss: 0.0032
Epoch 32/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0045 - val_loss: 0.0053
Epoch 33/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0038 - val_loss: 0.0043
Epoch 34/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0042 - val_loss: 0.0032
Epoch 35/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0034 - val_loss: 0.0036
Epoch 36/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0036
Epoch 37/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0034
Epoch 38/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0040 - val_loss: 0.0028
Epoch 39/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0029
Epoch 40/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0042 - val_loss: 0.0026
Epoch 41/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0047 - val_loss: 0.0033
Epoch 42/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0034 - val_loss: 0.0027
Epoch 43/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0041
Epoch 44/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0023
Epoch 45/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0030 - val_loss: 0.0032
Epoch 46/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0026
Epoch 47/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0031 - val_loss: 0.0025
Epoch 48/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0031 - val_loss: 0.0032
Epoch 49/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0043 - val_loss: 0.0023
Epoch 50/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0031 - val_loss: 0.0023
Epoch 51/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0027
Epoch 52/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0036 - val_loss: 0.0026
Epoch 53/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0029 - val_loss: 0.0022
Epoch 54/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0037 - val_loss: 0.0027
Epoch 55/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0033 - val_loss: 0.0049
Epoch 56/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0046 - val_loss: 0.0041
Epoch 57/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0024
Epoch 58/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0037 - val_loss: 0.0029
Epoch 59/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0042 - val_loss: 0.0030
Epoch 60/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0039 - val_loss: 0.0022
Epoch 61/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0035 - val_loss: 0.0021
Epoch 62/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0035 - val_loss: 0.0021
Epoch 63/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0031 - val_loss: 0.0026
Epoch 64/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0033 - val_loss: 0.0030
Epoch 65/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0021
Epoch 66/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0022
Epoch 67/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0035
Epoch 68/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0032 - val_loss: 0.0027
Epoch 69/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0036 - val_loss: 0.0022
Epoch 70/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0029
Epoch 71/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0038 - val_loss: 0.0026
Epoch 72/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0021
Epoch 73/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0032 - val_loss: 0.0034
Epoch 74/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0048 - val_loss: 0.0035
Epoch 75/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0028
Epoch 76/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0021
Epoch 77/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0030 - val_loss: 0.0031
Epoch 78/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0022
Epoch 79/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0031 - val_loss: 0.0031
Epoch 80/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0039 - val_loss: 0.0055
Epoch 81/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0045 - val_loss: 0.0020
Epoch 82/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 83/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0023
Epoch 84/200
27/27 [==============================] - 1s 29ms/step - loss: 0.0031 - val_loss: 0.0021
Epoch 85/200
27/27 [==============================] - 1s 30ms/step - loss: 0.0030 - val_loss: 0.0027
Epoch 86/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 87/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0032 - val_loss: 0.0023
Epoch 88/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0027 - val_loss: 0.0020
Epoch 89/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 90/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0041
Epoch 91/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0035 - val_loss: 0.0043
Epoch 92/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0026 - val_loss: 0.0020
Epoch 93/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0020
Epoch 94/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0037 - val_loss: 0.0032
Epoch 95/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0030 - val_loss: 0.0022
Epoch 96/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0022
Epoch 97/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0029
Epoch 98/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0037 - val_loss: 0.0028
Epoch 99/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0034 - val_loss: 0.0021
Epoch 100/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0031
Epoch 101/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0025
Epoch 102/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0034 - val_loss: 0.0021
Epoch 103/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 104/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0028 - val_loss: 0.0020
Epoch 105/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0029 - val_loss: 0.0039
Epoch 106/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0022
Epoch 107/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0029 - val_loss: 0.0021
Epoch 108/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0027
Epoch 109/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0033 - val_loss: 0.0024
Epoch 110/200
27/27 [==============================] - 1s 24ms/step - loss: 0.0030 - val_loss: 0.0020
Epoch 111/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0032 - val_loss: 0.0031
Epoch 112/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0029 - val_loss: 0.0020
Epoch 113/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0029
Epoch 114/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0024
Epoch 115/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0024 - val_loss: 0.0019
Epoch 116/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0027
Epoch 117/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 118/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0031 - val_loss: 0.0029
Epoch 119/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0030 - val_loss: 0.0020
Epoch 120/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0023
Epoch 121/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0033 - val_loss: 0.0044
Epoch 122/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0033 - val_loss: 0.0031
Epoch 123/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0022
Epoch 124/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0019
Epoch 125/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0023
Epoch 126/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0020
Epoch 127/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0037
Epoch 128/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0036 - val_loss: 0.0049
Epoch 129/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0033 - val_loss: 0.0031
Epoch 130/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0038
Epoch 131/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0022
Epoch 132/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0025 - val_loss: 0.0024
Epoch 133/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0026 - val_loss: 0.0024
Epoch 134/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0021
Epoch 135/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0020
Epoch 136/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0020
Epoch 137/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0023
Epoch 138/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0025 - val_loss: 0.0020
Epoch 139/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0026 - val_loss: 0.0021
Epoch 140/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0034 - val_loss: 0.0025
Epoch 141/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0030 - val_loss: 0.0026
Epoch 142/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0031 - val_loss: 0.0046
Epoch 143/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0029 - val_loss: 0.0028
Epoch 144/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0021
Epoch 145/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0028 - val_loss: 0.0021
Epoch 146/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0032 - val_loss: 0.0043
Epoch 147/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0023
Epoch 148/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0031
Epoch 149/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0020
Epoch 150/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0028 - val_loss: 0.0037
Epoch 151/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0027
Epoch 152/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0025 - val_loss: 0.0028
Epoch 153/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 154/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0030
Epoch 155/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0025 - val_loss: 0.0023
Epoch 156/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0019
Epoch 157/200
27/27 [==============================] - 1s 28ms/step - loss: 0.0028 - val_loss: 0.0038
Epoch 158/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0033 - val_loss: 0.0021
Epoch 159/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0024 - val_loss: 0.0033
Epoch 160/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 161/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0025
Epoch 162/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0023
Epoch 163/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0023
Epoch 164/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0030 - val_loss: 0.0021
Epoch 165/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0035
Epoch 166/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0027 - val_loss: 0.0021
Epoch 167/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0022
Epoch 168/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0040
Epoch 169/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0025 - val_loss: 0.0036
Epoch 170/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0036
Epoch 171/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0025 - val_loss: 0.0024
Epoch 172/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0032 - val_loss: 0.0024
Epoch 173/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0026 - val_loss: 0.0023
Epoch 174/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0036
Epoch 175/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0025 - val_loss: 0.0022
Epoch 176/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0024 - val_loss: 0.0025
Epoch 177/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0024 - val_loss: 0.0023
Epoch 178/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0031
Epoch 179/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0034 - val_loss: 0.0045
Epoch 180/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0027 - val_loss: 0.0027
Epoch 181/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0028
Epoch 182/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0028
Epoch 183/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0026 - val_loss: 0.0027
Epoch 184/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0023 - val_loss: 0.0029
Epoch 185/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0023
Epoch 186/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0024 - val_loss: 0.0022
Epoch 187/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0022 - val_loss: 0.0030
Epoch 188/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0026 - val_loss: 0.0024
Epoch 189/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0023 - val_loss: 0.0021
Epoch 190/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0022 - val_loss: 0.0032
Epoch 191/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0023 - val_loss: 0.0030
Epoch 192/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0025 - val_loss: 0.0021
Epoch 193/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0027 - val_loss: 0.0036
Epoch 194/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0024 - val_loss: 0.0030
Epoch 195/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0022 - val_loss: 0.0028
Epoch 196/200
27/27 [==============================] - 1s 27ms/step - loss: 0.0023 - val_loss: 0.0023
Epoch 197/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0023 - val_loss: 0.0026
Epoch 198/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0029 - val_loss: 0.0025
Epoch 199/200
27/27 [==============================] - 1s 26ms/step - loss: 0.0022 - val_loss: 0.0029
Epoch 200/200
27/27 [==============================] - 1s 25ms/step - loss: 0.0024 - val_loss: 0.0021
Out[96]:
<keras.callbacks.History at 0x20d52e350d0>
In [97]:
### Lets Do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
train_predict.shape, test_predict.shape
5/5 [==============================] - 2s 9ms/step
3/3 [==============================] - 0s 10ms/step
Out[97]:
((135, 1), (85, 1))
In [98]:
# Transform back to original form

train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
original_ytrain = scaler.inverse_transform(y_train.reshape(-1,1)) 
original_ytest = scaler.inverse_transform(y_test.reshape(-1,1)) 

Evaluation metrices RMSE, MSE and MAE¶

Root Mean Square Error (RMSE), Mean Square Error (MSE) and Mean absolute Error (MAE) are a standard way to measure the error of a model in predicting quantitative data.

In [99]:
# Evaluation metrices RMSE and MAE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("Train data MSE: ", mean_squared_error(original_ytrain,train_predict))
print("Test data MAE: ", mean_absolute_error(original_ytrain,train_predict))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
print("Test data MSE: ", mean_squared_error(original_ytest,test_predict))
print("Test data MAE: ", mean_absolute_error(original_ytest,test_predict))
Train data RMSE:  6.502959413568092
Train data MSE:  42.288481134513866
Test data MAE:  5.008209265147569
-------------------------------------------------------------------------------------
Test data RMSE:  6.468049952365773
Test data MSE:  41.83567018629887
Test data MAE:  5.009154932743566
In [100]:
print("Train data explained variance regression score:", explained_variance_score(original_ytrain, train_predict))
print("Test data explained variance regression score:", explained_variance_score(original_ytest, test_predict))
Train data explained variance regression score: 0.972895831554671
Test data explained variance regression score: 0.933062418338791

R2 score for regression¶

R-squared (R2) is a statistical measure that represents the proportion of the variance for a dependent variable that's explained by an independent variable or variables in a regression model.

1 = Best 0 or < 0 = worse

In [101]:
print("Train data R2 score:", r2_score(original_ytrain, train_predict))
print("Test data R2 score:", r2_score(original_ytest, test_predict))
Train data R2 score: 0.9727784167092925
Test data R2 score: 0.9303666424606309

Comparision between original stock close price vs predicted close price¶

In [102]:
# shift train predictions for plotting

look_back=time_step
trainPredictPlot = np.empty_like(closedf)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
print("Train predicted data: ", trainPredictPlot.shape)

# shift test predictions for plotting
testPredictPlot = np.empty_like(closedf)
testPredictPlot[:, :] = np.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(closedf)-1, :] = test_predict
print("Test predicted data: ", testPredictPlot.shape)

names = cycle(['Original close price','Train predicted close price','Test predicted close price'])

plotdf = pd.DataFrame({'date': close_stock['date'],
                       'original_close': close_stock['close'],
                      'train_predicted_close': trainPredictPlot.reshape(1,-1)[0].tolist(),
                      'test_predicted_close': testPredictPlot.reshape(1,-1)[0].tolist()})

fig = px.line(plotdf,x=plotdf['date'], y=[plotdf['original_close'],plotdf['train_predicted_close'],
                                          plotdf['test_predicted_close']],
              labels={'value':'Stock price','date': 'Date'})
fig.update_layout(title_text='Comparision between original close price vs predicted close price',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Close Price')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
Train predicted data:  (252, 1)
Test predicted data:  (252, 1)

Predicting next 10 days¶

In [103]:
x_input=test_data[len(test_data)-time_step:].reshape(1,-1)
temp_input=list(x_input)
temp_input=temp_input[0].tolist()

from numpy import array

lst_output=[]
n_steps=time_step
i=0
pred_days = 10
while(i<pred_days):
    
    if(len(temp_input)>time_step):
        
        x_input=np.array(temp_input[1:])
        #print("{} day input {}".format(i,x_input))
        x_input = x_input.reshape(1,-1)
        x_input = x_input.reshape((1, n_steps, 1))
        
        yhat = model.predict(x_input, verbose=0)
        #print("{} day output {}".format(i,yhat))
        temp_input.extend(yhat[0].tolist())
        temp_input=temp_input[1:]
        #print(temp_input)
       
        lst_output.extend(yhat.tolist())
        i=i+1
        
    else:
        
        x_input = x_input.reshape((1, n_steps,1))
        yhat = model.predict(x_input, verbose=0)
        temp_input.extend(yhat[0].tolist())
        
        lst_output.extend(yhat.tolist())
        i=i+1
               
print("Output of predicted next days: ", len(lst_output))
Output of predicted next days:  10

Plotting last 15 days and next predicted 10 days¶

In [104]:
last_days=np.arange(1,time_step+1)
day_pred=np.arange(time_step+1,time_step+pred_days+1)
print(last_days)
print(day_pred)
[ 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15]
[16 17 18 19 20 21 22 23 24 25]
In [105]:
temp_mat = np.empty((len(last_days)+pred_days+1,1))
temp_mat[:] = np.nan
temp_mat = temp_mat.reshape(1,-1).tolist()[0]

last_original_days_value = temp_mat
next_predicted_days_value = temp_mat

last_original_days_value[0:time_step+1] = scaler.inverse_transform(closedf[len(closedf)-time_step:]).reshape(1,-1).tolist()[0]
next_predicted_days_value[time_step+1:] = scaler.inverse_transform(np.array(lst_output).reshape(-1,1)).reshape(1,-1).tolist()[0]

new_pred_plot = pd.DataFrame({
    'last_original_days_value':last_original_days_value,
    'next_predicted_days_value':next_predicted_days_value
})
names = cycle(['Last 15 days close price','Predicted next 10 days close price'])

fig = px.line(new_pred_plot,x=new_pred_plot.index, y=[new_pred_plot['last_original_days_value'],
                                                      new_pred_plot['next_predicted_days_value']],
              labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Compare last 15 days vs next 10 days',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Close Price')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()

Plotting whole closing stock price with prediction¶

In [106]:
lstmgrudf=closedf.tolist()
lstmgrudf.extend((np.array(lst_output).reshape(-1,1)).tolist())
lstmgrudf=scaler.inverse_transform(lstmgrudf).reshape(1,-1).tolist()[0]

names = cycle(['Close price'])

fig = px.line(lstmgrudf,labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Plotting whole closing stock price with prediction',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Stock')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
In [ ]:
 
In [ ]:
 

RNN¶

In [107]:
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler  # Or another suitable scaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import SimpleRNN, Dense
In [108]:
# convert an array of values into a dataset matrix
def create_dataset(dataset, time_step=1):
    dataX, dataY = [], []
    for i in range(len(dataset)-time_step-1):
        a = dataset[i:(i+time_step), 0]   ###i=0, 0,1,2,3-----99   100 
        dataX.append(a)
        dataY.append(dataset[i + time_step, 0])
    return np.array(dataX), np.array(dataY)
In [109]:
time_step = 15
X_train, y_train = create_dataset(train_data, time_step)
X_test, y_test = create_dataset(test_data, time_step)

print("X_train: ", X_train.shape)
print("y_train: ", y_train.shape)
print("X_test: ", X_test.shape)
print("y_test", y_test.shape)
X_train:  (135, 15)
y_train:  (135,)
X_test:  (85, 15)
y_test (85,)
In [110]:
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)

print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (135, 15, 1)
X_test:  (85, 15, 1)
In [111]:
import tensorflow as tf

tf.keras.backend.clear_session()  # Clear previous model state

model = tf.keras.Sequential([
    tf.keras.layers.SimpleRNN(32, return_sequences=True, input_shape=(time_step, 1)),  # First RNN layer
    tf.keras.layers.SimpleRNN(32, return_sequences=True),  # Second RNN layer
    tf.keras.layers.SimpleRNN(32),  # Final RNN layer
    tf.keras.layers.Dropout(0.20),  # Regularization
    tf.keras.layers.Dense(1)  # Output layer
])

model.compile(loss='mean_squared_error', optimizer='adam')
In [112]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 simple_rnn (SimpleRNN)      (None, 15, 32)            1088      
                                                                 
 simple_rnn_1 (SimpleRNN)    (None, 15, 32)            2080      
                                                                 
 simple_rnn_2 (SimpleRNN)    (None, 32)                2080      
                                                                 
 dropout (Dropout)           (None, 32)                0         
                                                                 
 dense (Dense)               (None, 1)                 33        
                                                                 
=================================================================
Total params: 5,281
Trainable params: 5,281
Non-trainable params: 0
_________________________________________________________________
In [113]:
history = model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=200,batch_size=32,verbose=1)
Epoch 1/200
5/5 [==============================] - 5s 173ms/step - loss: 0.3423 - val_loss: 0.0758
Epoch 2/200
5/5 [==============================] - 0s 25ms/step - loss: 0.2838 - val_loss: 0.0224
Epoch 3/200
5/5 [==============================] - 0s 29ms/step - loss: 0.2263 - val_loss: 0.0261
Epoch 4/200
5/5 [==============================] - 0s 26ms/step - loss: 0.1638 - val_loss: 0.0166
Epoch 5/200
5/5 [==============================] - 0s 25ms/step - loss: 0.1418 - val_loss: 0.0149
Epoch 6/200
5/5 [==============================] - 0s 24ms/step - loss: 0.1370 - val_loss: 0.0118
Epoch 7/200
5/5 [==============================] - 0s 23ms/step - loss: 0.1197 - val_loss: 0.0165
Epoch 8/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0809 - val_loss: 0.0246
Epoch 9/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0804 - val_loss: 0.0105
Epoch 10/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0459 - val_loss: 0.0140
Epoch 11/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0418 - val_loss: 0.0072
Epoch 12/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0366 - val_loss: 0.0069
Epoch 13/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0454 - val_loss: 0.0067
Epoch 14/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0386 - val_loss: 0.0131
Epoch 15/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0444 - val_loss: 0.0087
Epoch 16/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0362 - val_loss: 0.0313
Epoch 17/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0370 - val_loss: 0.0051
Epoch 18/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0269 - val_loss: 0.0054
Epoch 19/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0282 - val_loss: 0.0059
Epoch 20/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0270 - val_loss: 0.0237
Epoch 21/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0326 - val_loss: 0.0126
Epoch 22/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0239 - val_loss: 0.0087
Epoch 23/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0213 - val_loss: 0.0135
Epoch 24/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0224 - val_loss: 0.0073
Epoch 25/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0211 - val_loss: 0.0084
Epoch 26/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0222 - val_loss: 0.0069
Epoch 27/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0162 - val_loss: 0.0054
Epoch 28/200
5/5 [==============================] - 0s 28ms/step - loss: 0.0170 - val_loss: 0.0062
Epoch 29/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0171 - val_loss: 0.0054
Epoch 30/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0183 - val_loss: 0.0052
Epoch 31/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0170 - val_loss: 0.0066
Epoch 32/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0170 - val_loss: 0.0064
Epoch 33/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0134 - val_loss: 0.0164
Epoch 34/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0168 - val_loss: 0.0081
Epoch 35/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0159 - val_loss: 0.0046
Epoch 36/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0173 - val_loss: 0.0107
Epoch 37/200
5/5 [==============================] - 0s 28ms/step - loss: 0.0191 - val_loss: 0.0225
Epoch 38/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0234 - val_loss: 0.0064
Epoch 39/200
5/5 [==============================] - 0s 28ms/step - loss: 0.0165 - val_loss: 0.0052
Epoch 40/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0198 - val_loss: 0.0120
Epoch 41/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0212 - val_loss: 0.0127
Epoch 42/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0172 - val_loss: 0.0120
Epoch 43/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0170 - val_loss: 0.0071
Epoch 44/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0150 - val_loss: 0.0088
Epoch 45/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0177 - val_loss: 0.0059
Epoch 46/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0147 - val_loss: 0.0045
Epoch 47/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0168 - val_loss: 0.0055
Epoch 48/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0177 - val_loss: 0.0064
Epoch 49/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0131 - val_loss: 0.0087
Epoch 50/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0176 - val_loss: 0.0044
Epoch 51/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0154 - val_loss: 0.0139
Epoch 52/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0177 - val_loss: 0.0168
Epoch 53/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0251 - val_loss: 0.0230
Epoch 54/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0188 - val_loss: 0.0048
Epoch 55/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0169 - val_loss: 0.0062
Epoch 56/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0169 - val_loss: 0.0034
Epoch 57/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0165 - val_loss: 0.0041
Epoch 58/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0160 - val_loss: 0.0064
Epoch 59/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0138 - val_loss: 0.0055
Epoch 60/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0118 - val_loss: 0.0084
Epoch 61/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0145 - val_loss: 0.0072
Epoch 62/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0106 - val_loss: 0.0041
Epoch 63/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0120 - val_loss: 0.0033
Epoch 64/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0104 - val_loss: 0.0036
Epoch 65/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0116 - val_loss: 0.0035
Epoch 66/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0117 - val_loss: 0.0051
Epoch 67/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0108 - val_loss: 0.0038
Epoch 68/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0103 - val_loss: 0.0032
Epoch 69/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0122 - val_loss: 0.0033
Epoch 70/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0118 - val_loss: 0.0035
Epoch 71/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0134 - val_loss: 0.0038
Epoch 72/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0150 - val_loss: 0.0066
Epoch 73/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0138 - val_loss: 0.0107
Epoch 74/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0184 - val_loss: 0.0240
Epoch 75/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0196 - val_loss: 0.0156
Epoch 76/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0188 - val_loss: 0.0209
Epoch 77/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0130 - val_loss: 0.0041
Epoch 78/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0103 - val_loss: 0.0042
Epoch 79/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0153 - val_loss: 0.0037
Epoch 80/200
5/5 [==============================] - 0s 32ms/step - loss: 0.0104 - val_loss: 0.0034
Epoch 81/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0104 - val_loss: 0.0032
Epoch 82/200
5/5 [==============================] - 0s 28ms/step - loss: 0.0094 - val_loss: 0.0032
Epoch 83/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0136 - val_loss: 0.0057
Epoch 84/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0131 - val_loss: 0.0049
Epoch 85/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0107 - val_loss: 0.0073
Epoch 86/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0100 - val_loss: 0.0043
Epoch 87/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0109 - val_loss: 0.0029
Epoch 88/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0090 - val_loss: 0.0028
Epoch 89/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0090 - val_loss: 0.0062
Epoch 90/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0108 - val_loss: 0.0062
Epoch 91/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0099 - val_loss: 0.0140
Epoch 92/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0099 - val_loss: 0.0057
Epoch 93/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0101 - val_loss: 0.0074
Epoch 94/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0090 - val_loss: 0.0063
Epoch 95/200
5/5 [==============================] - 0s 30ms/step - loss: 0.0089 - val_loss: 0.0066
Epoch 96/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0106 - val_loss: 0.0073
Epoch 97/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0117 - val_loss: 0.0082
Epoch 98/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0104 - val_loss: 0.0026
Epoch 99/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0098 - val_loss: 0.0028
Epoch 100/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0109 - val_loss: 0.0029
Epoch 101/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0105 - val_loss: 0.0032
Epoch 102/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0094 - val_loss: 0.0028
Epoch 103/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0096 - val_loss: 0.0029
Epoch 104/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0116 - val_loss: 0.0027
Epoch 105/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0095 - val_loss: 0.0029
Epoch 106/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0083 - val_loss: 0.0028
Epoch 107/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0098 - val_loss: 0.0028
Epoch 108/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0100 - val_loss: 0.0028
Epoch 109/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0087 - val_loss: 0.0046
Epoch 110/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0085 - val_loss: 0.0054
Epoch 111/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0070 - val_loss: 0.0028
Epoch 112/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0096 - val_loss: 0.0039
Epoch 113/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0108 - val_loss: 0.0031
Epoch 114/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0114 - val_loss: 0.0027
Epoch 115/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0086 - val_loss: 0.0029
Epoch 116/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0106 - val_loss: 0.0031
Epoch 117/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0075 - val_loss: 0.0032
Epoch 118/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0100 - val_loss: 0.0029
Epoch 119/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0070 - val_loss: 0.0040
Epoch 120/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0101 - val_loss: 0.0027
Epoch 121/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0086 - val_loss: 0.0068
Epoch 122/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0105 - val_loss: 0.0030
Epoch 123/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0080 - val_loss: 0.0032
Epoch 124/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0090 - val_loss: 0.0030
Epoch 125/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0082 - val_loss: 0.0033
Epoch 126/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0072 - val_loss: 0.0029
Epoch 127/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0084 - val_loss: 0.0032
Epoch 128/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0073 - val_loss: 0.0026
Epoch 129/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0102 - val_loss: 0.0043
Epoch 130/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0137 - val_loss: 0.0034
Epoch 131/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0117 - val_loss: 0.0071
Epoch 132/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0128 - val_loss: 0.0077
Epoch 133/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0108 - val_loss: 0.0091
Epoch 134/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0094 - val_loss: 0.0108
Epoch 135/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0127 - val_loss: 0.0087
Epoch 136/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0133 - val_loss: 0.0034
Epoch 137/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0105 - val_loss: 0.0038
Epoch 138/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0098 - val_loss: 0.0025
Epoch 139/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0081 - val_loss: 0.0069
Epoch 140/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0084 - val_loss: 0.0037
Epoch 141/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0089 - val_loss: 0.0060
Epoch 142/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0079 - val_loss: 0.0032
Epoch 143/200
5/5 [==============================] - 0s 29ms/step - loss: 0.0083 - val_loss: 0.0045
Epoch 144/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0089 - val_loss: 0.0028
Epoch 145/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0072 - val_loss: 0.0027
Epoch 146/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0075 - val_loss: 0.0027
Epoch 147/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0091 - val_loss: 0.0029
Epoch 148/200
5/5 [==============================] - 0s 27ms/step - loss: 0.0089 - val_loss: 0.0038
Epoch 149/200
5/5 [==============================] - 0s 29ms/step - loss: 0.0083 - val_loss: 0.0078
Epoch 150/200
5/5 [==============================] - 0s 31ms/step - loss: 0.0105 - val_loss: 0.0053
Epoch 151/200
5/5 [==============================] - 0s 34ms/step - loss: 0.0087 - val_loss: 0.0056
Epoch 152/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0079 - val_loss: 0.0040
Epoch 153/200
5/5 [==============================] - 0s 29ms/step - loss: 0.0074 - val_loss: 0.0032
Epoch 154/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0066 - val_loss: 0.0035
Epoch 155/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0064 - val_loss: 0.0033
Epoch 156/200
5/5 [==============================] - 0s 26ms/step - loss: 0.0093 - val_loss: 0.0029
Epoch 157/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0092 - val_loss: 0.0029
Epoch 158/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0078 - val_loss: 0.0028
Epoch 159/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0075 - val_loss: 0.0025
Epoch 160/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0091 - val_loss: 0.0024
Epoch 161/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0070 - val_loss: 0.0025
Epoch 162/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0066 - val_loss: 0.0035
Epoch 163/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0088 - val_loss: 0.0054
Epoch 164/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0111 - val_loss: 0.0039
Epoch 165/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0073 - val_loss: 0.0034
Epoch 166/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0102 - val_loss: 0.0023
Epoch 167/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0075 - val_loss: 0.0025
Epoch 168/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0066 - val_loss: 0.0027
Epoch 169/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0065 - val_loss: 0.0026
Epoch 170/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0069 - val_loss: 0.0029
Epoch 171/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0064 - val_loss: 0.0024
Epoch 172/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0064 - val_loss: 0.0026
Epoch 173/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0082 - val_loss: 0.0031
Epoch 174/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0084 - val_loss: 0.0044
Epoch 175/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0081 - val_loss: 0.0059
Epoch 176/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0097 - val_loss: 0.0025
Epoch 177/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0066 - val_loss: 0.0026
Epoch 178/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0089 - val_loss: 0.0046
Epoch 179/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0090 - val_loss: 0.0028
Epoch 180/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0060 - val_loss: 0.0031
Epoch 181/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0068 - val_loss: 0.0028
Epoch 182/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0063 - val_loss: 0.0039
Epoch 183/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0062 - val_loss: 0.0036
Epoch 184/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0082 - val_loss: 0.0069
Epoch 185/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0081 - val_loss: 0.0026
Epoch 186/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0065 - val_loss: 0.0036
Epoch 187/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0070 - val_loss: 0.0024
Epoch 188/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0061 - val_loss: 0.0035
Epoch 189/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0071 - val_loss: 0.0024
Epoch 190/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0072 - val_loss: 0.0029
Epoch 191/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0076 - val_loss: 0.0030
Epoch 192/200
5/5 [==============================] - 0s 25ms/step - loss: 0.0060 - val_loss: 0.0027
Epoch 193/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0080 - val_loss: 0.0037
Epoch 194/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0087 - val_loss: 0.0026
Epoch 195/200
5/5 [==============================] - 0s 24ms/step - loss: 0.0115 - val_loss: 0.0089
Epoch 196/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0157 - val_loss: 0.0143
Epoch 197/200
5/5 [==============================] - 0s 22ms/step - loss: 0.0132 - val_loss: 0.0028
Epoch 198/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0091 - val_loss: 0.0023
Epoch 199/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0103 - val_loss: 0.0058
Epoch 200/200
5/5 [==============================] - 0s 23ms/step - loss: 0.0090 - val_loss: 0.0035

Plotting loss chart¶

In [114]:
import matplotlib.pyplot as plt

loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(loss))

plt.plot(epochs, loss, 'r', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend(loc=0)
plt.figure()


plt.show()
<Figure size 432x288 with 0 Axes>
In [115]:
### Lets Do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
train_predict.shape, test_predict.shape
5/5 [==============================] - 1s 5ms/step
3/3 [==============================] - 0s 6ms/step
Out[115]:
((135, 1), (85, 1))

Model Evaluation¶

In [116]:
# Transform back to original form

train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
original_ytrain = scaler.inverse_transform(y_train.reshape(-1,1)) 
original_ytest = scaler.inverse_transform(y_test.reshape(-1,1)) 

Evaluation metrices RMSE, MSE and MAE¶

In [117]:
# Evaluation metrices RMSE and MAE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("Train data MSE: ", mean_squared_error(original_ytrain,train_predict))
print("Train data MAE: ", mean_absolute_error(original_ytrain,train_predict))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
print("Test data MSE: ", mean_squared_error(original_ytest,test_predict))
print("Test data MAE: ", mean_absolute_error(original_ytest,test_predict))
Train data RMSE:  10.504482281231299
Train data MSE:  110.34414799670233
Train data MAE:  8.526288324768517
-------------------------------------------------------------------------------------
Test data RMSE:  8.281076379375767
Test data MSE:  68.57622600105526
Test data MAE:  6.463434207306985

Explained variance regression score¶

In [118]:
print("Train data explained variance regression score:", explained_variance_score(original_ytrain, train_predict))
print("Test data explained variance regression score:", explained_variance_score(original_ytest, test_predict))
Train data explained variance regression score: 0.9649029389049337
Test data explained variance regression score: 0.9234324882030626
In [119]:
print("Train data R2 score:", r2_score(original_ytrain, train_predict))
print("Test data R2 score:", r2_score(original_ytest, test_predict))
Train data R2 score: 0.9289701986273784
Test data R2 score: 0.8858583394847602
In [120]:
print("Train data MGD: ", mean_gamma_deviance(original_ytrain, train_predict))
print("Test data MGD: ", mean_gamma_deviance(original_ytest, test_predict))
print("----------------------------------------------------------------------")
print("Train data MPD: ", mean_poisson_deviance(original_ytrain, train_predict))
print("Test data MPD: ", mean_poisson_deviance(original_ytest, test_predict))
Train data MGD:  0.0019501385780327772
Test data MGD:  0.001485347034263269
----------------------------------------------------------------------
Train data MPD:  0.4596157213577642
Test data MPD:  0.31708440077435934

Comparision between original stock close price vs predicted close price¶¶

In [121]:
# shift train predictions for plotting

look_back=time_step
trainPredictPlot = np.empty_like(closedf)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
print("Train predicted data: ", trainPredictPlot.shape)

# shift test predictions for plotting
testPredictPlot = np.empty_like(closedf)
testPredictPlot[:, :] = np.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(closedf)-1, :] = test_predict
print("Test predicted data: ", testPredictPlot.shape)

names = cycle(['Original close price','Train predicted close price','Test predicted close price'])

plotdf = pd.DataFrame({'date': close_stock['date'],
                       'original_close': close_stock['close'],
                      'train_predicted_close': trainPredictPlot.reshape(1,-1)[0].tolist(),
                      'test_predicted_close': testPredictPlot.reshape(1,-1)[0].tolist()})

fig = px.line(plotdf,x=plotdf['date'], y=[plotdf['original_close'],plotdf['train_predicted_close'],
                                          plotdf['test_predicted_close']],
              labels={'value':'Stock price','date': 'Date'})
fig.update_layout(title_text='Comparision between original close price vs predicted close price',
                  plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Close Price')
fig.for_each_trace(lambda t:  t.update(name = next(names)))

fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
Train predicted data:  (252, 1)
Test predicted data:  (252, 1)

Plotting last 15 days of dataset and next predicted 30 days¶

In [122]:
last_days=np.arange(1,time_step+1)
day_pred=np.arange(time_step+1,time_step+pred_days+1)
print(last_days)
print(day_pred)
[ 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15]
[16 17 18 19 20 21 22 23 24 25]
In [123]:
temp_mat = np.empty((len(last_days)+pred_days+1,1))
temp_mat[:] = np.nan
temp_mat = temp_mat.reshape(1,-1).tolist()[0]

last_original_days_value = temp_mat
next_predicted_days_value = temp_mat

last_original_days_value[0:time_step+1] = scaler.inverse_transform(closedf[len(closedf)-time_step:]).reshape(1,-1).tolist()[0]
next_predicted_days_value[time_step+1:] = scaler.inverse_transform(np.array(lst_output).reshape(-1,1)).reshape(1,-1).tolist()[0]

new_pred_plot = pd.DataFrame({
    'last_original_days_value':last_original_days_value,
    'next_predicted_days_value':next_predicted_days_value
})

names = cycle(['Last 15 days close price','Predicted next 30 days close price'])

fig = px.line(new_pred_plot,x=new_pred_plot.index, y=[new_pred_plot['last_original_days_value'],
                                                      new_pred_plot['next_predicted_days_value']],
              labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Compare last 15 days vs next 30 days',
                  plot_bgcolor='white', font_size=15, font_color='black',legend_title_text='Close Price')

fig.for_each_trace(lambda t:  t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
In [ ]:
 
In [ ]:
 

ARIMA¶

In [124]:
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
from statsmodels.tsa.arima.model import ARIMA
In [125]:
# Load your data into a DataFrame
# Assuming your data is in a CSV file named 'your_data.csv'
data = pd.read_csv('TSLA.csv')
data
Out[125]:
Date Open High Low Close Adj Close Volume
0 2018-03-01 23.000668 23.244667 22.004667 22.062000 22.062000 103284000
1 2018-03-02 21.798668 22.348000 21.531334 22.341333 22.341333 76392000
2 2018-03-05 22.159332 22.516666 21.952667 22.223333 22.223333 57357000
3 2018-03-06 22.250000 22.424667 21.802000 21.879999 21.879999 64285500
4 2018-03-07 21.695999 22.166668 21.449333 22.153334 22.153334 75109500
... ... ... ... ... ... ... ...
1511 2024-03-04 198.729996 199.750000 186.720001 188.139999 188.139999 134334900
1512 2024-03-05 183.050003 184.589996 177.570007 180.740005 180.740005 119660800
1513 2024-03-06 179.990005 181.580002 173.699997 176.539993 176.539993 107920900
1514 2024-03-07 174.350006 180.039993 173.699997 178.649994 178.649994 102129000
1515 2024-03-08 181.500000 182.729996 174.699997 175.339996 175.339996 85315300

1516 rows × 7 columns

In [126]:
import pandas as pd
from statsmodels.tsa.arima.model import ARIMA

# Load your time series data into a DataFrame
# Assuming your data is in a CSV file named 'your_data.csv'
data = pd.read_csv('TSLA.csv')

# Assuming 'Close' column contains your time series data
time_series = data['Close']

# Fit ARIMA model
order = (5, 1, 0)  # Example order parameters (p, d, q)
# p: Number of lag observations included in the model (AR order)
# d: Degree of differencing (Integrated order)
# q: Size of the moving average window (MA order)
model = ARIMA(time_series, order=order)
fitted_model = model.fit()

# Optionally, you can print the summary of the fitted model
print(fitted_model.summary())

# Make predictions
# For example, you can use the predict() method to forecast future values
forecast = fitted_model.predict(start=len(time_series), end=len(time_series) + 10)

# Print the forecasted values
print("Forecasted values:", forecast)
                               SARIMAX Results                                
==============================================================================
Dep. Variable:                  Close   No. Observations:                 1516
Model:                 ARIMA(5, 1, 0)   Log Likelihood               -5105.967
Date:                Thu, 18 Apr 2024   AIC                          10223.934
Time:                        12:15:14   BIC                          10255.874
Sample:                             0   HQIC                         10235.827
                               - 1516                                         
Covariance Type:                  opg                                         
==============================================================================
                 coef    std err          z      P>|z|      [0.025      0.975]
------------------------------------------------------------------------------
ar.L1         -0.0254      0.019     -1.357      0.175      -0.062       0.011
ar.L2          0.0101      0.016      0.619      0.536      -0.022       0.042
ar.L3         -0.0056      0.021     -0.271      0.786      -0.046       0.035
ar.L4          0.0617      0.017      3.563      0.000       0.028       0.096
ar.L5         -0.0168      0.017     -0.997      0.319      -0.050       0.016
sigma2        49.5341      0.876     56.572      0.000      47.818      51.250
===================================================================================
Ljung-Box (L1) (Q):                   0.00   Jarque-Bera (JB):              2923.10
Prob(Q):                              0.99   Prob(JB):                         0.00
Heteroskedasticity (H):              56.34   Skew:                            -0.13
Prob(H) (two-sided):                  0.00   Kurtosis:                         9.80
===================================================================================

Warnings:
[1] Covariance matrix calculated using the outer product of gradients (complex-step).
Forecasted values: 1516    175.255793
1517    175.077577
1518    175.300621
1519    175.053839
1520    175.113769
1521    175.098907
1522    175.118039
1523    175.098085
1524    175.106714
1525    175.104261
1526    175.105953
Name: predicted_mean, dtype: float64
In [127]:
# Print the forecasted values
print("Forecasted values:", forecast)
Forecasted values: 1516    175.255793
1517    175.077577
1518    175.300621
1519    175.053839
1520    175.113769
1521    175.098907
1522    175.118039
1523    175.098085
1524    175.106714
1525    175.104261
1526    175.105953
Name: predicted_mean, dtype: float64
In [14]:
!pip install pmdarima
import os
import warnings
warnings.filterwarnings('ignore')
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from statsmodels.tsa.stattools import adfuller
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.arima_model import ARIMA
from pmdarima.arima import auto_arima
from sklearn.metrics import mean_squared_error, mean_absolute_error
import math
dateparse = lambda dates: pd.datetime.strptime(dates, '%Y-%m-%d')
Defaulting to user installation because normal site-packages is not writeable
Requirement already satisfied: pmdarima in c:\users\jc849\appdata\roaming\python\python39\site-packages (2.0.3)
Requirement already satisfied: statsmodels>=0.13.2 in c:\users\jc849\appdata\roaming\python\python39\site-packages (from pmdarima) (0.14.1)
Requirement already satisfied: numpy>=1.21.2 in c:\users\jc849\appdata\roaming\python\python39\site-packages (from pmdarima) (1.22.4)
Requirement already satisfied: scikit-learn>=0.22 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (1.0.2)
Requirement already satisfied: scipy>=1.3.2 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (1.7.3)
Requirement already satisfied: joblib>=0.11 in c:\users\jc849\appdata\roaming\python\python39\site-packages (from pmdarima) (1.2.0)
Requirement already satisfied: Cython!=0.29.18,!=0.29.31,>=0.29 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (0.29.28)
Requirement already satisfied: pandas>=0.19 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (1.4.2)
Requirement already satisfied: urllib3 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (1.26.9)
Requirement already satisfied: setuptools!=50.0.0,>=38.6.0 in c:\programdata\anaconda3\lib\site-packages (from pmdarima) (61.2.0)
Requirement already satisfied: python-dateutil>=2.8.1 in c:\programdata\anaconda3\lib\site-packages (from pandas>=0.19->pmdarima) (2.8.2)
Requirement already satisfied: pytz>=2020.1 in c:\users\jc849\appdata\roaming\python\python39\site-packages (from pandas>=0.19->pmdarima) (2023.3.post1)
Requirement already satisfied: six>=1.5 in c:\programdata\anaconda3\lib\site-packages (from python-dateutil>=2.8.1->pandas>=0.19->pmdarima) (1.16.0)
Requirement already satisfied: threadpoolctl>=2.0.0 in c:\programdata\anaconda3\lib\site-packages (from scikit-learn>=0.22->pmdarima) (2.2.0)
Requirement already satisfied: patsy>=0.5.4 in c:\users\jc849\appdata\roaming\python\python39\site-packages (from statsmodels>=0.13.2->pmdarima) (0.5.6)
Requirement already satisfied: packaging>=21.3 in c:\programdata\anaconda3\lib\site-packages (from statsmodels>=0.13.2->pmdarima) (21.3)
Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in c:\programdata\anaconda3\lib\site-packages (from packaging>=21.3->statsmodels>=0.13.2->pmdarima) (3.0.4)
In [15]:
stock_data = pd.read_csv('TSLA.csv')
stock_data
Out[15]:
Date Open High Low Close Adj Close Volume
0 2018-03-01 23.000668 23.244667 22.004667 22.062000 22.062000 103284000
1 2018-03-02 21.798668 22.348000 21.531334 22.341333 22.341333 76392000
2 2018-03-05 22.159332 22.516666 21.952667 22.223333 22.223333 57357000
3 2018-03-06 22.250000 22.424667 21.802000 21.879999 21.879999 64285500
4 2018-03-07 21.695999 22.166668 21.449333 22.153334 22.153334 75109500
... ... ... ... ... ... ... ...
1511 2024-03-04 198.729996 199.750000 186.720001 188.139999 188.139999 134334900
1512 2024-03-05 183.050003 184.589996 177.570007 180.740005 180.740005 119660800
1513 2024-03-06 179.990005 181.580002 173.699997 176.539993 176.539993 107920900
1514 2024-03-07 174.350006 180.039993 173.699997 178.649994 178.649994 102129000
1515 2024-03-08 181.500000 182.729996 174.699997 175.339996 175.339996 85315300

1516 rows × 7 columns

In [16]:
#plot close price
plt.figure(figsize=(10,6))
plt.grid(True)
plt.xlabel('Date')
plt.ylabel('Close Prices')
plt.plot(stock_data['Close'])
plt.title('ARCH CAPITAL GROUP closing price')
plt.show()
In [17]:
#Distribution of the dataset
df_close = stock_data['Close']
df_close.plot(kind='kde')
Out[17]:
<AxesSubplot:ylabel='Density'>
In [18]:
#Test for staionarity
def test_stationarity(timeseries):
    #Determing rolling statistics
    rolmean = timeseries.rolling(12).mean()
    rolstd = timeseries.rolling(12).std()
    #Plot rolling statistics:
    plt.plot(timeseries, color='blue',label='Original')
    plt.plot(rolmean, color='red', label='Rolling Mean')
    plt.plot(rolstd, color='black', label = 'Rolling Std')
    plt.legend(loc='best')
    plt.title('Rolling Mean and Standard Deviation')
    plt.show(block=False)
    print("Results of dickey fuller test")
    adft = adfuller(timeseries,autolag='AIC')
    # output for dft will give us without defining what the values are.
    #hence we manually write what values does it explains using a for loop
    output = pd.Series(adft[0:4],index=['Test Statistics','p-value','No. of lags used','Number of observations used'])
    for key,values in adft[4].items():
        output['critical value (%s)'%key] =  values
    print(output)
test_stationarity(df_close)
Results of dickey fuller test
Test Statistics                  -1.620099
p-value                           0.472677
No. of lags used                 24.000000
Number of observations used    1491.000000
critical value (1%)              -3.434743
critical value (5%)              -2.863480
critical value (10%)             -2.567803
dtype: float64
In [19]:
result = seasonal_decompose(df_close, model='multiplicative', period=30)
fig = plt.figure()  
fig = result.plot()  
fig.set_size_inches(16, 9)
<Figure size 432x288 with 0 Axes>
In [20]:
#if not stationary then eliminate trend
#Eliminate trend
from pylab import rcParams
rcParams['figure.figsize'] = 10, 6
df_log = np.log(df_close)
moving_avg = df_log.rolling(12).mean()
std_dev = df_log.rolling(12).std()
plt.legend(loc='best')
plt.title('Moving Average')
plt.plot(std_dev, color ="black", label = "Standard Deviation")
plt.plot(moving_avg, color="red", label = "Mean")
plt.legend()
plt.show()
No artists with labels found to put in legend.  Note that artists whose label start with an underscore are ignored when legend() is called with no argument.
In [21]:
#split data into train and training set
train_data, test_data = df_log[3:int(len(df_log)*0.9)], df_log[int(len(df_log)*0.9):]
plt.figure(figsize=(10,6))
plt.grid(True)
plt.xlabel('Dates')
plt.ylabel('Closing Prices')
plt.plot(df_log, 'green', label='Train data')
plt.plot(test_data, 'blue', label='Test data')
plt.legend()
Out[21]:
<matplotlib.legend.Legend at 0x1fa261d6100>
In [22]:
model_autoARIMA = auto_arima(train_data, start_p=0, start_q=0,
                      test='adf',       # use adftest to find optimal 'd'
                      max_p=3, max_q=3, # maximum p and q
                      m=1,              # frequency of series
                      d=None,           # let model determine 'd'
                      seasonal=False,   # No Seasonality
                      start_P=0, 
                      D=0, 
                      trace=True,
                      error_action='ignore',  
                      suppress_warnings=True, 
                      stepwise=True)
print(model_autoARIMA.summary())
model_autoARIMA.plot_diagnostics(figsize=(15,8))
plt.show()
Performing stepwise search to minimize aic
 ARIMA(0,1,0)(0,0,0)[0] intercept   : AIC=-4811.167, Time=0.45 sec
 ARIMA(1,1,0)(0,0,0)[0] intercept   : AIC=-4809.882, Time=0.26 sec
 ARIMA(0,1,1)(0,0,0)[0] intercept   : AIC=-4809.830, Time=0.48 sec
 ARIMA(0,1,0)(0,0,0)[0]             : AIC=-4810.507, Time=0.13 sec
 ARIMA(1,1,1)(0,0,0)[0] intercept   : AIC=-4807.187, Time=0.89 sec

Best model:  ARIMA(0,1,0)(0,0,0)[0] intercept
Total fit time: 2.274 seconds
                               SARIMAX Results                                
==============================================================================
Dep. Variable:                      y   No. Observations:                 1361
Model:               SARIMAX(0, 1, 0)   Log Likelihood                2407.583
Date:                Sat, 20 Apr 2024   AIC                          -4811.167
Time:                        09:00:04   BIC                          -4800.736
Sample:                             0   HQIC                         -4807.262
                               - 1361                                         
Covariance Type:                  opg                                         
==============================================================================
                 coef    std err          z      P>|z|      [0.025      0.975]
------------------------------------------------------------------------------
intercept      0.0018      0.001      1.627      0.104      -0.000       0.004
sigma2         0.0017   3.95e-05     43.019      0.000       0.002       0.002
===================================================================================
Ljung-Box (L1) (Q):                   0.72   Jarque-Bera (JB):               690.03
Prob(Q):                              0.40   Prob(JB):                         0.00
Heteroskedasticity (H):               1.36   Skew:                            -0.17
Prob(H) (two-sided):                  0.00   Kurtosis:                         6.47
===================================================================================

Warnings:
[1] Covariance matrix calculated using the outer product of gradients (complex-step).
In [23]:
from statsmodels.tsa.arima.model import ARIMA

# Modeling
# Build Model
model = ARIMA(train_data, order=(1, 1, 2))  
fitted = model.fit()  
print(fitted.summary())
                               SARIMAX Results                                
==============================================================================
Dep. Variable:                  Close   No. Observations:                 1361
Model:                 ARIMA(1, 1, 2)   Log Likelihood                2408.560
Date:                Sat, 20 Apr 2024   AIC                          -4809.119
Time:                        09:00:05   BIC                          -4788.259
Sample:                             0   HQIC                         -4801.310
                               - 1361                                         
Covariance Type:                  opg                                         
==============================================================================
                 coef    std err          z      P>|z|      [0.025      0.975]
------------------------------------------------------------------------------
ar.L1          0.5110      0.347      1.473      0.141      -0.169       1.191
ma.L1         -0.5327      0.350     -1.523      0.128      -1.218       0.153
ma.L2          0.0552      0.024      2.337      0.019       0.009       0.101
sigma2         0.0017   4.12e-05     41.173      0.000       0.002       0.002
===================================================================================
Ljung-Box (L1) (Q):                   0.01   Jarque-Bera (JB):               643.49
Prob(Q):                              0.94   Prob(JB):                         0.00
Heteroskedasticity (H):               1.35   Skew:                            -0.14
Prob(H) (two-sided):                  0.00   Kurtosis:                         6.36
===================================================================================

Warnings:
[1] Covariance matrix calculated using the outer product of gradients (complex-step).
In [24]:
# Forecast
fc = fitted.forecast(steps=321)  # Forecast 321 steps ahead
# Make as pandas series
fc_series = pd.Series(fc, index=test_data.index)
# Plot
plt.figure(figsize=(10,5), dpi=100)
plt.plot(train_data, label='training data')
plt.plot(test_data, color='blue', label='Actual Stock Price')
plt.plot(fc_series, color='orange', label='Predicted Stock Price')
plt.title('Tesla Stock Price Prediction')
plt.xlabel('Time')
plt.ylabel('Tesla Stock Price')
plt.legend(loc='upper left', fontsize=8)
plt.show()
In [25]:
# Forecast
fc = fitted.forecast(steps=len(test_data))  # Forecast same number of steps as in the test set
# Make as pandas series
fc_series = pd.Series(fc, index=test_data.index)

# Report performance
mse = mean_squared_error(test_data, fc_series)
print('MSE: '+str(mse))
mae = mean_absolute_error(test_data, fc_series)
print('MAE: '+str(mae))
rmse = math.sqrt(mean_squared_error(test_data, fc_series))
print('RMSE: '+str(rmse))
mape = np.mean(np.abs(fc_series - test_data)/np.abs(test_data))
print('MAPE: '+str(mape))
MSE: 0.031516135646542154
MAE: 0.13795128770597245
RMSE: 0.17752784470764624
MAPE: 0.025857587895209793
In [26]:
accuracy = 1 - 0.025857587895209793
print("Accuracy:", accuracy)
Accuracy: 0.9741424121047902

Around 2.5% MAPE implies the model is about 97.5% accurate in predicting the next 15 observations¶

In [ ]: